Parameters

The following parameters determine the regional coverage of the forecast and the positions for which we plot detailed time series. They also determine the location of (temporary) data files, and details for the parallelization with Dask.

In [1]:
# parameters

# regional coverage
lat_min, lat_max = -20, 30
lon_min, lon_max = 310, 20

# good forecasts reach
good_forecast_days = 7  # 7 days

# Platforms and buoys to be plotted
selected_platforms = ['SO']
selected_buoys = [300034013902340]
selected_glider = ['ifm14']
drift_buoy_drop_history_before = "2021-07-10"

# additional buoy positions to be plotted
# The ones found in the downloaded data will be shown anyway.
added_stations = [
    {"kind": "buoy", "name": "Pirata Buoy", "lat": 20.0, "lon": -38.0},
    {"kind": "buoy", "name": "Pirata Buoy", "lat": 15.0, "lon": -38.0},
    {"kind": "buoy", "name": "Pirata Buoy", "lat": 21.0, "lon": -23.0},
    {"kind": "buoy", "name": "Pirata Buoy", "lat": 12.0, "lon": -23.0},
    {"kind": "buoy", "name": "Pirata Buoy", "lat": -6.0, "lon": -10.0},
    {"kind": "buoy", "name": "Pirata Buoy", "lat": -10.0 ,"lon": -10.0},
    {"kind": "poi", "name": "Seamount Annan", "lat": 9.25, "lon": -21.333},
    {"kind": "poi", "name": "Seamount Carter", "lat": 9.0, "lon": -20.33}
]

# data files
GFS_zarr_store = "tmp_GFS.zarr"
slab_zarr_store = "tmp_slab.zarr"
buoy_file_name = "tmp_buoy_data"
buoy_positions_file = "tmp_buoy_positions.csv"
drift_buoy_history_file = "tmp_buoy_history.csv"
mimoc_mld_file = "tmp_mimoc_mld.nc"
platforms_file = "data/platforms.csv"
gliders_file = "data/gliders.csv"

# dask specifics
dask_kwargs = {"n_workers": 1, "threads_per_worker": 2, "memory_limit": 6e9}

Technial Preamble

Before doing any calculations, we'll need to import a few modules. We'll also start a Dask cluster for parallel execution.

In [2]:
# dask
from dask.distributed import Client

# plotting
from bokeh.models.formatters import DatetimeTickFormatter
import cartopy.crs as ccrs
import cmocean
import geoviews as gv
import holoviews as hv
import hvplot.xarray, hvplot.pandas

# numerics
import numpy as np
import pandas as pd
import xarray as xr

# aux
from functools import reduce
from operator import add

import requests
In [3]:
# create Dask cluster
client = Client(**dask_kwargs)
client
Out[3]:

Client

Cluster

  • Workers: 1
  • Cores: 2
  • Memory: 6.00 GB

Get platform data

In [4]:
platform_positions = pd.read_csv(platforms_file, index_col=[0, ]).loc[selected_platforms]
platform_positions["longitude"] = platform_positions["longitude"] % 360.0

platform_positions["kind"] = "vessel"
platform_positions["name"] = platform_positions["platform"].apply(lambda s: f"R/V {s}")

platform_positions = platform_positions.rename(columns={
    "longitude": "lon",
    "latitude": "lat",
})

platform_positions
Out[4]:
platform leg time lat lon kind name
SO SO SO285-cancelled 2021-09-05 22:16:42 -10.430254 346.471192 vessel R/V SO

Get glider data

In [5]:
glider_positions = pd.read_csv(gliders_file, index_col=[0, ]).loc[selected_glider]
glider_positions["longitude"] = glider_positions["longitude"] % 360.0

glider_positions["kind"] = "glider"
glider_positions["name"] = glider_positions["platform"]

glider_positions = glider_positions.rename(columns={
    "longitude": "lon",
    "latitude": "lat",
})

glider_positions["name"] = glider_positions["name"].apply(lambda s: f"Glider {s}")

glider_positions
Out[5]:
time lat lon platform kind name
ifm14 2021-08-04 09:02:41 9.2725 340.5761 ifm14 glider Glider ifm14

Get buoy locations from the buoy data set

In [6]:
buoy_positions = pd.read_csv(buoy_positions_file)
buoy_positions = buoy_positions.query(('@lat_min <= lat <= @lat_max') and ('@lon_min <= lon <= @lon_max'))
buoy_positions["kind"] = "buoy"
buoy_positions["name"] = "Pirata Buoy"
buoy_positions.head(3)
Out[6]:
lat lon kind name

Get drifting buoy locations

In [7]:
def get_drifting_buoy_info(buoy: int):
    """Get current status for drifting buoy.
    
    Parameters
    ----------
    buoy : int
        Buoy ID.
        
    Returns
    -------
    dict
        "name": Buoy id.
        "lon": Longitude.
        "lat": Latitude.
        "time": Time stamp of position.

    """
    buoy_info = requests.get(
            f"https://data.geomar.de/realtime/data/project/{buoy}/{buoy}_pos.json",
        ).json()
    
    time, lat, lon = buoy_info
    
    return {
        "name": 'db' + str(buoy)[-4:],
        "time": time,
        "lat": lat,
        "lon": lon,
    }

def get_all_buoys_df(buoys: list = None):
    return pd.DataFrame(
        {
        pfname: get_drifting_buoy_info(pfname) for pfname in buoys
    }).T

drift_buoy_positions = get_all_buoys_df(selected_buoys)
drift_buoy_positions.to_csv("data/d_buoys.csv", index = False)
In [8]:
drift_buoy_positions = pd.read_csv("data/d_buoys.csv")
drift_buoy_positions['name'] = drift_buoy_positions['name'].apply(str)
drift_buoy_positions["kind"] = "drifting buoy"
drift_buoy_positions.head(3)
Out[8]:
lat lon name time kind
0 9.17565 -18.34721 db2340 2021-08-04T01:13:00 drifting buoy

Cast additional positions into dataframe

In [9]:
added_stations = pd.DataFrame(added_stations)
added_stations.head(3)
Out[9]:
kind name lat lon
0 buoy Pirata Buoy 20.0 -38.0
1 buoy Pirata Buoy 15.0 -38.0
2 buoy Pirata Buoy 21.0 -23.0

Combine all positions

In [10]:
all_positions = pd.concat((
    df.reset_index().drop(columns=["index", ]) 
    for df in [
        added_stations, buoy_positions, drift_buoy_positions, platform_positions, glider_positions,
    ]
))
all_positions["lon"] %= 360.0

all_positions["lat"] = all_positions["lat"].round(decimals=3)
all_positions["lon"] = all_positions["lon"].round(decimals=3)

all_positions = all_positions.set_index(pd.Series(range(len(all_positions))))

all_positions.head(3)
Out[10]:
kind name lat lon time platform leg
0 buoy Pirata Buoy 20.0 322.0 NaN NaN NaN
1 buoy Pirata Buoy 15.0 322.0 NaN NaN NaN
2 buoy Pirata Buoy 21.0 337.0 NaN NaN NaN

Load the GFS and slab-model data

In [11]:
ds_GFS = xr.open_zarr(GFS_zarr_store)
ds_slab = xr.open_zarr(slab_zarr_store)
ds_mld = xr.open_dataset(mimoc_mld_file)

Find start of forecast period

We'll need the time stamp of the start of the forecasting data.

In [12]:
start_of_forecast = (~ds_GFS["is_forecast"].astype(bool)).sum().compute().data
start_of_forecast = ds_GFS["time"].data[max(0, start_of_forecast-1)]
print(start_of_forecast)
2021-09-05T06:00:00.000000000
In [13]:
good_forecast_time = np.timedelta64(good_forecast_days, "D")

Restrict regionally

In [14]:
ds_GFS = ds_GFS.roll(lon=(ds_slab.dims['lon'] // 2)).sel(
    lat=slice(lat_max, lat_min),
    lon=slice(lon_min, lon_max),
)
ds_slab = ds_slab.roll(lon=(ds_slab.dims['lon'] // 2)).sel(
    lat=slice(lat_max, lat_min),
    lon=slice(lon_min, lon_max),
)
/srv/conda/envs/notebook/lib/python3.7/site-packages/ipykernel_launcher.py:1: FutureWarning: roll_coords will be set to False in the future. Explicitly set roll_coords to silence warning.
  """Entry point for launching an IPython kernel.
/srv/conda/envs/notebook/lib/python3.7/site-packages/ipykernel_launcher.py:5: FutureWarning: roll_coords will be set to False in the future. Explicitly set roll_coords to silence warning.
  """
In [15]:
ds_GFS
Out[15]:
Show/Hide data repr Show/Hide attributes
xarray.Dataset
    • lat: 101
    • lon: 141
    • time: 239
    • lat
      (lat)
      float32
      30.0 29.5 29.0 ... -19.5 -20.0
      _CoordinateAxisType :
      Lat
      units :
      degrees_north
      array([ 30. ,  29.5,  29. ,  28.5,  28. ,  27.5,  27. ,  26.5,  26. ,  25.5,
              25. ,  24.5,  24. ,  23.5,  23. ,  22.5,  22. ,  21.5,  21. ,  20.5,
              20. ,  19.5,  19. ,  18.5,  18. ,  17.5,  17. ,  16.5,  16. ,  15.5,
              15. ,  14.5,  14. ,  13.5,  13. ,  12.5,  12. ,  11.5,  11. ,  10.5,
              10. ,   9.5,   9. ,   8.5,   8. ,   7.5,   7. ,   6.5,   6. ,   5.5,
               5. ,   4.5,   4. ,   3.5,   3. ,   2.5,   2. ,   1.5,   1. ,   0.5,
               0. ,  -0.5,  -1. ,  -1.5,  -2. ,  -2.5,  -3. ,  -3.5,  -4. ,  -4.5,
              -5. ,  -5.5,  -6. ,  -6.5,  -7. ,  -7.5,  -8. ,  -8.5,  -9. ,  -9.5,
             -10. , -10.5, -11. , -11.5, -12. , -12.5, -13. , -13.5, -14. , -14.5,
             -15. , -15.5, -16. , -16.5, -17. , -17.5, -18. , -18.5, -19. , -19.5,
             -20. ], dtype=float32)
    • lon
      (lon)
      float32
      310.0 310.5 311.0 ... 19.5 20.0
      _CoordinateAxisType :
      Lon
      units :
      degrees_east
      array([310. , 310.5, 311. , 311.5, 312. , 312.5, 313. , 313.5, 314. , 314.5,
             315. , 315.5, 316. , 316.5, 317. , 317.5, 318. , 318.5, 319. , 319.5,
             320. , 320.5, 321. , 321.5, 322. , 322.5, 323. , 323.5, 324. , 324.5,
             325. , 325.5, 326. , 326.5, 327. , 327.5, 328. , 328.5, 329. , 329.5,
             330. , 330.5, 331. , 331.5, 332. , 332.5, 333. , 333.5, 334. , 334.5,
             335. , 335.5, 336. , 336.5, 337. , 337.5, 338. , 338.5, 339. , 339.5,
             340. , 340.5, 341. , 341.5, 342. , 342.5, 343. , 343.5, 344. , 344.5,
             345. , 345.5, 346. , 346.5, 347. , 347.5, 348. , 348.5, 349. , 349.5,
             350. , 350.5, 351. , 351.5, 352. , 352.5, 353. , 353.5, 354. , 354.5,
             355. , 355.5, 356. , 356.5, 357. , 357.5, 358. , 358.5, 359. , 359.5,
               0. ,   0.5,   1. ,   1.5,   2. ,   2.5,   3. ,   3.5,   4. ,   4.5,
               5. ,   5.5,   6. ,   6.5,   7. ,   7.5,   8. ,   8.5,   9. ,   9.5,
              10. ,  10.5,  11. ,  11.5,  12. ,  12.5,  13. ,  13.5,  14. ,  14.5,
              15. ,  15.5,  16. ,  16.5,  17. ,  17.5,  18. ,  18.5,  19. ,  19.5,
              20. ], dtype=float32)
    • time
      (time)
      datetime64[ns]
      2021-08-06 ... 2021-09-21T12:00:00
      _CoordinateAxisType :
      Time
      long_name :
      GRIB forecast or observation time
      standard_name :
      time
      array(['2021-08-06T00:00:00.000000000', '2021-08-06T06:00:00.000000000',
             '2021-08-06T12:00:00.000000000', ..., '2021-09-21T06:00:00.000000000',
             '2021-09-21T09:00:00.000000000', '2021-09-21T12:00:00.000000000'],
            dtype='datetime64[ns]')
    • SLP
      (time, lat, lon)
      float32
      dask.array<chunksize=(239, 80, 40), meta=np.ndarray>
      Grib2_Generating_Process_Type :
      Analysis
      Grib2_Level_Desc :
      Ground or water surface
      Grib2_Level_Type :
      1
      Grib2_Parameter :
      [0, 3, 0]
      Grib2_Parameter_Category :
      Mass
      Grib2_Parameter_Discipline :
      Meteorological products
      Grib2_Parameter_Name :
      Pressure
      Grib_Variable_Id :
      VAR_0-3-0_L1
      abbreviation :
      PRES
      grid_mapping :
      LatLon_Projection
      long_name :
      Pressure @ Ground or water surface
      units :
      hPa
      Array Chunk
      Bytes 13.61 MB 7.65 MB
      Shape (239, 101, 141) (239, 80, 100)
      Count 199 Tasks 6 Chunks
      Type float32 numpy.ndarray
      141 101 239
    • U20
      (time, lat, lon)
      float32
      dask.array<chunksize=(239, 80, 40), meta=np.ndarray>
      Grib2_Generating_Process_Type :
      Analysis
      Grib2_Level_Desc :
      Specified height level above ground
      Grib2_Level_Type :
      103
      Grib2_Parameter :
      [0, 2, 2]
      Grib2_Parameter_Category :
      Momentum
      Grib2_Parameter_Discipline :
      Meteorological products
      Grib2_Parameter_Name :
      u-component of wind
      Grib_Variable_Id :
      VAR_0-2-2_L103
      abbreviation :
      UGRD
      grid_mapping :
      LatLon_Projection
      long_name :
      u-component of wind @ Specified height level above ground
      units :
      m/s
      Array Chunk
      Bytes 13.61 MB 7.65 MB
      Shape (239, 101, 141) (239, 80, 100)
      Count 199 Tasks 6 Chunks
      Type float32 numpy.ndarray
      141 101 239
    • V20
      (time, lat, lon)
      float32
      dask.array<chunksize=(239, 80, 40), meta=np.ndarray>
      Grib2_Generating_Process_Type :
      Analysis
      Grib2_Level_Desc :
      Specified height level above ground
      Grib2_Level_Type :
      103
      Grib2_Parameter :
      [0, 2, 3]
      Grib2_Parameter_Category :
      Momentum
      Grib2_Parameter_Discipline :
      Meteorological products
      Grib2_Parameter_Name :
      v-component of wind
      Grib_Variable_Id :
      VAR_0-2-3_L103
      abbreviation :
      VGRD
      grid_mapping :
      LatLon_Projection
      long_name :
      v-component of wind @ Specified height level above ground
      units :
      m/s
      Array Chunk
      Bytes 13.61 MB 7.65 MB
      Shape (239, 101, 141) (239, 80, 100)
      Count 199 Tasks 6 Chunks
      Type float32 numpy.ndarray
      141 101 239
    • is_forecast
      (time)
      bool
      dask.array<chunksize=(239,), meta=np.ndarray>
      Array Chunk
      Bytes 239 B 239 B
      Shape (239,) (239,)
      Count 2 Tasks 1 Chunks
      Type bool numpy.ndarray
      239 1
    • taux
      (time, lat, lon)
      float32
      dask.array<chunksize=(239, 80, 40), meta=np.ndarray>
      Array Chunk
      Bytes 13.61 MB 7.65 MB
      Shape (239, 101, 141) (239, 80, 100)
      Count 199 Tasks 6 Chunks
      Type float32 numpy.ndarray
      141 101 239
    • tauy
      (time, lat, lon)
      float32
      dask.array<chunksize=(239, 80, 40), meta=np.ndarray>
      Array Chunk
      Bytes 13.61 MB 7.65 MB
      Shape (239, 101, 141) (239, 80, 100)
      Count 199 Tasks 6 Chunks
      Type float32 numpy.ndarray
      141 101 239
In [16]:
ds_slab
Out[16]:
Show/Hide data repr Show/Hide attributes
xarray.Dataset
    • lat: 101
    • lon: 141
    • time: 239
    • lat
      (lat)
      float32
      30.0 29.5 29.0 ... -19.5 -20.0
      _CoordinateAxisType :
      Lat
      units :
      degrees_north
      array([ 30. ,  29.5,  29. ,  28.5,  28. ,  27.5,  27. ,  26.5,  26. ,  25.5,
              25. ,  24.5,  24. ,  23.5,  23. ,  22.5,  22. ,  21.5,  21. ,  20.5,
              20. ,  19.5,  19. ,  18.5,  18. ,  17.5,  17. ,  16.5,  16. ,  15.5,
              15. ,  14.5,  14. ,  13.5,  13. ,  12.5,  12. ,  11.5,  11. ,  10.5,
              10. ,   9.5,   9. ,   8.5,   8. ,   7.5,   7. ,   6.5,   6. ,   5.5,
               5. ,   4.5,   4. ,   3.5,   3. ,   2.5,   2. ,   1.5,   1. ,   0.5,
               0. ,  -0.5,  -1. ,  -1.5,  -2. ,  -2.5,  -3. ,  -3.5,  -4. ,  -4.5,
              -5. ,  -5.5,  -6. ,  -6.5,  -7. ,  -7.5,  -8. ,  -8.5,  -9. ,  -9.5,
             -10. , -10.5, -11. , -11.5, -12. , -12.5, -13. , -13.5, -14. , -14.5,
             -15. , -15.5, -16. , -16.5, -17. , -17.5, -18. , -18.5, -19. , -19.5,
             -20. ], dtype=float32)
    • lon
      (lon)
      float32
      310.0 310.5 311.0 ... 19.5 20.0
      _CoordinateAxisType :
      Lon
      units :
      degrees_east
      array([310. , 310.5, 311. , 311.5, 312. , 312.5, 313. , 313.5, 314. , 314.5,
             315. , 315.5, 316. , 316.5, 317. , 317.5, 318. , 318.5, 319. , 319.5,
             320. , 320.5, 321. , 321.5, 322. , 322.5, 323. , 323.5, 324. , 324.5,
             325. , 325.5, 326. , 326.5, 327. , 327.5, 328. , 328.5, 329. , 329.5,
             330. , 330.5, 331. , 331.5, 332. , 332.5, 333. , 333.5, 334. , 334.5,
             335. , 335.5, 336. , 336.5, 337. , 337.5, 338. , 338.5, 339. , 339.5,
             340. , 340.5, 341. , 341.5, 342. , 342.5, 343. , 343.5, 344. , 344.5,
             345. , 345.5, 346. , 346.5, 347. , 347.5, 348. , 348.5, 349. , 349.5,
             350. , 350.5, 351. , 351.5, 352. , 352.5, 353. , 353.5, 354. , 354.5,
             355. , 355.5, 356. , 356.5, 357. , 357.5, 358. , 358.5, 359. , 359.5,
               0. ,   0.5,   1. ,   1.5,   2. ,   2.5,   3. ,   3.5,   4. ,   4.5,
               5. ,   5.5,   6. ,   6.5,   7. ,   7.5,   8. ,   8.5,   9. ,   9.5,
              10. ,  10.5,  11. ,  11.5,  12. ,  12.5,  13. ,  13.5,  14. ,  14.5,
              15. ,  15.5,  16. ,  16.5,  17. ,  17.5,  18. ,  18.5,  19. ,  19.5,
              20. ], dtype=float32)
    • time
      (time)
      datetime64[ns]
      2021-08-06 ... 2021-09-21T11:00:00
      array(['2021-08-06T00:00:00.000000000', '2021-08-06T06:00:00.000000000',
             '2021-08-06T12:00:00.000000000', ..., '2021-09-21T06:00:00.000000000',
             '2021-09-21T09:00:00.000000000', '2021-09-21T11:00:00.000000000'],
            dtype='datetime64[ns]')
    • u_slab
      (lat, lon, time)
      float32
      dask.array<chunksize=(80, 40, 239), meta=np.ndarray>
      Array Chunk
      Bytes 13.61 MB 7.65 MB
      Shape (101, 141, 239) (80, 100, 239)
      Count 199 Tasks 6 Chunks
      Type float32 numpy.ndarray
      239 141 101
    • umag_slab
      (lat, lon, time)
      float32
      dask.array<chunksize=(80, 40, 239), meta=np.ndarray>
      Array Chunk
      Bytes 13.61 MB 7.65 MB
      Shape (101, 141, 239) (80, 100, 239)
      Count 199 Tasks 6 Chunks
      Type float32 numpy.ndarray
      239 141 101
    • v_slab
      (lat, lon, time)
      float32
      dask.array<chunksize=(80, 40, 239), meta=np.ndarray>
      Array Chunk
      Bytes 13.61 MB 7.65 MB
      Shape (101, 141, 239) (80, 100, 239)
      Count 199 Tasks 6 Chunks
      Type float32 numpy.ndarray
      239 141 101
  • slab_model_H :
    1
In [17]:
if lon_min < lon_max:
    all_positions = all_positions[
        all_positions["lat"].between(lat_min, lat_max)
        & all_positions["lon"].between(lon_min, lon_max)
    ]
else:
    all_positions = all_positions[
        all_positions["lat"].between(lat_min, lat_max)
        & ~all_positions["lon"].between(min(lon_min, lon_max), max(lon_min, lon_max))
    ]
In [18]:
all_positions
Out[18]:
kind name lat lon time platform leg
0 buoy Pirata Buoy 20.000 322.000 NaN NaN NaN
1 buoy Pirata Buoy 15.000 322.000 NaN NaN NaN
2 buoy Pirata Buoy 21.000 337.000 NaN NaN NaN
3 buoy Pirata Buoy 12.000 337.000 NaN NaN NaN
4 buoy Pirata Buoy -6.000 350.000 NaN NaN NaN
5 buoy Pirata Buoy -10.000 350.000 NaN NaN NaN
6 poi Seamount Annan 9.250 338.667 NaN NaN NaN
7 poi Seamount Carter 9.000 339.670 NaN NaN NaN
8 drifting buoy db2340 9.176 341.653 2021-08-04T01:13:00 NaN NaN
9 vessel R/V SO -10.430 346.471 2021-09-05 22:16:42 SO SO285-cancelled
10 glider Glider ifm14 9.272 340.576 2021-08-04 09:02:41 ifm14 NaN

Load drift buoy history

In [19]:
db_history = pd.read_csv(drift_buoy_history_file).set_index("Time")
db_history = db_history.loc[drift_buoy_drop_history_before:]
db_history
Out[19]:
Latitude Longitude
Time
2021-07-10 00:00:00 NaN NaN
2021-07-10 01:00:00 NaN NaN
2021-07-10 02:00:00 NaN NaN
2021-07-10 03:00:00 NaN NaN
2021-07-10 04:00:00 NaN NaN
... ... ...
2021-08-03 21:00:00 9.193695 -18.348345
2021-08-03 22:00:00 9.188245 -18.348960
2021-08-03 23:00:00 9.183130 -18.348437
2021-08-04 00:00:00 9.178410 -18.348040
2021-08-04 01:00:00 9.175650 -18.347210

602 rows × 2 columns

Prepare MLD data

In [20]:
years_data = list(ds_slab.time.groupby("time.year").groups.keys())
years_min = min(years_data)
years_max = min(years_data)
duration_years_data = years_max - years_min + 1

# pad by one
years_min -= 1
duration_years_data += 2

mld_time_coord_lower = xr.DataArray(
    [
        np.datetime64(f"{years_min + m // 12:04d}-{(m % 12) + 1:02d}-01")
        for m in range(0, duration_years_data * 12)
    ],
    dims=("time", )
)
mld_time_coord_upper = xr.DataArray(
    [
        np.datetime64(f"{years_min + m // 12:04d}-{(m % 12) + 1:02d}-01")
        for m in range(1, duration_years_data * 12 + 1)
    ],
    dims=("time", )
)
mld_time_coord = (
    mld_time_coord_lower
    + (mld_time_coord_upper - mld_time_coord_lower) / 2.0
)
display(mld_time_coord)
Show/Hide data repr Show/Hide attributes
xarray.DataArray
  • time: 36
  • 2020-01-16T12:00:00 2020-02-15T12:00:00 ... 2022-12-16T12:00:00
    array(['2020-01-16T12:00:00.000000000', '2020-02-15T12:00:00.000000000',
           '2020-03-16T12:00:00.000000000', '2020-04-16T00:00:00.000000000',
           '2020-05-16T12:00:00.000000000', '2020-06-16T00:00:00.000000000',
           '2020-07-16T12:00:00.000000000', '2020-08-16T12:00:00.000000000',
           '2020-09-16T00:00:00.000000000', '2020-10-16T12:00:00.000000000',
           '2020-11-16T00:00:00.000000000', '2020-12-16T12:00:00.000000000',
           '2021-01-16T12:00:00.000000000', '2021-02-15T00:00:00.000000000',
           '2021-03-16T12:00:00.000000000', '2021-04-16T00:00:00.000000000',
           '2021-05-16T12:00:00.000000000', '2021-06-16T00:00:00.000000000',
           '2021-07-16T12:00:00.000000000', '2021-08-16T12:00:00.000000000',
           '2021-09-16T00:00:00.000000000', '2021-10-16T12:00:00.000000000',
           '2021-11-16T00:00:00.000000000', '2021-12-16T12:00:00.000000000',
           '2022-01-16T12:00:00.000000000', '2022-02-15T00:00:00.000000000',
           '2022-03-16T12:00:00.000000000', '2022-04-16T00:00:00.000000000',
           '2022-05-16T12:00:00.000000000', '2022-06-16T00:00:00.000000000',
           '2022-07-16T12:00:00.000000000', '2022-08-16T12:00:00.000000000',
           '2022-09-16T00:00:00.000000000', '2022-10-16T12:00:00.000000000',
           '2022-11-16T00:00:00.000000000', '2022-12-16T12:00:00.000000000'],
          dtype='datetime64[ns]')
    In [21]:
    mld_expand = xr.concat(
        [ds_mld for n in range(duration_years_data)],
        dim="month"
    ).rename({"month": "time"})
    mld_expand.coords["time"] = mld_time_coord
    mld_expand
    
    Out[21]:
    Show/Hide data repr Show/Hide attributes
    xarray.Dataset
      • lat: 341
      • lon: 720
      • time: 36
      • lon
        (lon)
        float32
        0.0 0.5 1.0 ... 358.5 359.0 359.5
        array([  0. ,   0.5,   1. , ..., 358.5, 359. , 359.5], dtype=float32)
      • lat
        (lat)
        float32
        -80.0 -79.5 -79.0 ... 89.5 90.0
        array([-80. , -79.5, -79. , ...,  89. ,  89.5,  90. ], dtype=float32)
      • time
        (time)
        datetime64[ns]
        2020-01-16T12:00:00 ... 2022-12-16T12:00:00
        array(['2020-01-16T12:00:00.000000000', '2020-02-15T12:00:00.000000000',
               '2020-03-16T12:00:00.000000000', '2020-04-16T00:00:00.000000000',
               '2020-05-16T12:00:00.000000000', '2020-06-16T00:00:00.000000000',
               '2020-07-16T12:00:00.000000000', '2020-08-16T12:00:00.000000000',
               '2020-09-16T00:00:00.000000000', '2020-10-16T12:00:00.000000000',
               '2020-11-16T00:00:00.000000000', '2020-12-16T12:00:00.000000000',
               '2021-01-16T12:00:00.000000000', '2021-02-15T00:00:00.000000000',
               '2021-03-16T12:00:00.000000000', '2021-04-16T00:00:00.000000000',
               '2021-05-16T12:00:00.000000000', '2021-06-16T00:00:00.000000000',
               '2021-07-16T12:00:00.000000000', '2021-08-16T12:00:00.000000000',
               '2021-09-16T00:00:00.000000000', '2021-10-16T12:00:00.000000000',
               '2021-11-16T00:00:00.000000000', '2021-12-16T12:00:00.000000000',
               '2022-01-16T12:00:00.000000000', '2022-02-15T00:00:00.000000000',
               '2022-03-16T12:00:00.000000000', '2022-04-16T00:00:00.000000000',
               '2022-05-16T12:00:00.000000000', '2022-06-16T00:00:00.000000000',
               '2022-07-16T12:00:00.000000000', '2022-08-16T12:00:00.000000000',
               '2022-09-16T00:00:00.000000000', '2022-10-16T12:00:00.000000000',
               '2022-11-16T00:00:00.000000000', '2022-12-16T12:00:00.000000000'],
              dtype='datetime64[ns]')
      • mixed_layer_depth
        (time, lat, lon)
        float32
        nan nan nan ... 32.09516 32.13625
        units :
        m
        array([[[      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                [      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                [      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                ...,
                [26.433268, 26.491982, 26.526407, ..., 26.178778, 26.231415,
                 26.326649],
                [26.24937 , 26.272125, 26.274122, ..., 26.061453, 26.100391,
                 26.175747],
                [26.795238, 26.79645 , 26.766792, ..., 26.669004, 26.694345,
                 26.747482]],
        
               [[      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                [      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                [      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                ...,
                [34.60562 , 34.412   , 34.28873 , ..., 34.29361 , 34.493492,
                 34.650978],
                [34.714344, 34.44238 , 34.268047, ..., 34.41533 , 34.659706,
                 34.818394],
                [34.793133, 34.458344, 34.240494, ..., 34.520046, 34.790157,
                 34.943398]],
        
               [[      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                [      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                [      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                ...,
                [34.760757, 34.47968 , 34.358986, ..., 34.71889 , 34.976482,
                 35.012016],
                [34.573467, 34.18083 , 33.993065, ..., 34.628742, 34.932842,
                 34.936428],
                [34.3476  , 33.86288 , 33.633575, ..., 34.549816, 34.857048,
                 34.81494 ]],
        
               ...,
        
               [[      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                [      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                [      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                ...,
                [35.410255, 35.330067, 35.162006, ..., 35.705242, 35.55527 ,
                 35.42727 ],
                [35.491386, 35.424343, 35.27083 , ..., 35.761486, 35.590282,
                 35.4821  ],
                [35.49925 , 35.450314, 35.313107, ..., 35.71577 , 35.53057 ,
                 35.45492 ]],
        
               [[      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                [      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                [      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                ...,
                [38.62612 , 38.588093, 38.504982, ..., 38.829372, 38.648785,
                 38.586113],
                [38.737125, 38.697975, 38.58274 , ..., 38.957764, 38.728115,
                 38.665955],
                [38.960854, 38.92994 , 38.800632, ..., 39.167187, 38.916836,
                 38.86398 ]],
        
               [[      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                [      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                [      nan,       nan,       nan, ...,       nan,       nan,
                       nan],
                ...,
                [31.768108, 31.852535, 31.842632, ..., 31.53782 , 31.470924,
                 31.566288],
                [31.702795, 31.735882, 31.673613, ..., 31.55104 , 31.45795 ,
                 31.533722],
                [32.268738, 32.270744, 32.17937 , ..., 32.219543, 32.09516 ,
                 32.13625 ]]], dtype=float32)
    In [22]:
    mld_slab = mld_expand.interp_like(
        ds_slab.coords["time"]
    ).sel(
        lat=ds_slab.coords["lat"], lon=ds_slab.coords["lon"], method="nearest"
    )
    
    In [23]:
    mld_slab = mld_slab.compute()
    
    In [24]:
    display(mld_slab)
    
    Show/Hide data repr Show/Hide attributes
    xarray.Dataset
      • lat: 101
      • lon: 141
      • time: 239
      • lon
        (lon)
        float32
        310.0 310.5 311.0 ... 19.5 20.0
        array([310. , 310.5, 311. , 311.5, 312. , 312.5, 313. , 313.5, 314. , 314.5,
               315. , 315.5, 316. , 316.5, 317. , 317.5, 318. , 318.5, 319. , 319.5,
               320. , 320.5, 321. , 321.5, 322. , 322.5, 323. , 323.5, 324. , 324.5,
               325. , 325.5, 326. , 326.5, 327. , 327.5, 328. , 328.5, 329. , 329.5,
               330. , 330.5, 331. , 331.5, 332. , 332.5, 333. , 333.5, 334. , 334.5,
               335. , 335.5, 336. , 336.5, 337. , 337.5, 338. , 338.5, 339. , 339.5,
               340. , 340.5, 341. , 341.5, 342. , 342.5, 343. , 343.5, 344. , 344.5,
               345. , 345.5, 346. , 346.5, 347. , 347.5, 348. , 348.5, 349. , 349.5,
               350. , 350.5, 351. , 351.5, 352. , 352.5, 353. , 353.5, 354. , 354.5,
               355. , 355.5, 356. , 356.5, 357. , 357.5, 358. , 358.5, 359. , 359.5,
                 0. ,   0.5,   1. ,   1.5,   2. ,   2.5,   3. ,   3.5,   4. ,   4.5,
                 5. ,   5.5,   6. ,   6.5,   7. ,   7.5,   8. ,   8.5,   9. ,   9.5,
                10. ,  10.5,  11. ,  11.5,  12. ,  12.5,  13. ,  13.5,  14. ,  14.5,
                15. ,  15.5,  16. ,  16.5,  17. ,  17.5,  18. ,  18.5,  19. ,  19.5,
                20. ], dtype=float32)
      • lat
        (lat)
        float32
        30.0 29.5 29.0 ... -19.5 -20.0
        array([ 30. ,  29.5,  29. ,  28.5,  28. ,  27.5,  27. ,  26.5,  26. ,  25.5,
                25. ,  24.5,  24. ,  23.5,  23. ,  22.5,  22. ,  21.5,  21. ,  20.5,
                20. ,  19.5,  19. ,  18.5,  18. ,  17.5,  17. ,  16.5,  16. ,  15.5,
                15. ,  14.5,  14. ,  13.5,  13. ,  12.5,  12. ,  11.5,  11. ,  10.5,
                10. ,   9.5,   9. ,   8.5,   8. ,   7.5,   7. ,   6.5,   6. ,   5.5,
                 5. ,   4.5,   4. ,   3.5,   3. ,   2.5,   2. ,   1.5,   1. ,   0.5,
                 0. ,  -0.5,  -1. ,  -1.5,  -2. ,  -2.5,  -3. ,  -3.5,  -4. ,  -4.5,
                -5. ,  -5.5,  -6. ,  -6.5,  -7. ,  -7.5,  -8. ,  -8.5,  -9. ,  -9.5,
               -10. , -10.5, -11. , -11.5, -12. , -12.5, -13. , -13.5, -14. , -14.5,
               -15. , -15.5, -16. , -16.5, -17. , -17.5, -18. , -18.5, -19. , -19.5,
               -20. ], dtype=float32)
      • time
        (time)
        datetime64[ns]
        2021-08-06 ... 2021-09-21T11:00:00
        array(['2021-08-06T00:00:00.000000000', '2021-08-06T06:00:00.000000000',
               '2021-08-06T12:00:00.000000000', ..., '2021-09-21T06:00:00.000000000',
               '2021-09-21T09:00:00.000000000', '2021-09-21T11:00:00.000000000'],
              dtype='datetime64[ns]')
      • mixed_layer_depth
        (time, lat, lon)
        float64
        19.72 19.97 20.3 ... nan nan nan
        units :
        m
        array([[[19.71571399, 19.9650567 , 20.30213578, ...,         nan,
                         nan,         nan],
                [20.65406741, 20.96034958, 21.29580479, ...,         nan,
                         nan,         nan],
                [21.45777988, 21.84531289, 22.19177704, ...,         nan,
                         nan,         nan],
                ...,
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan]],
        
               [[19.75668501, 20.00379215, 20.33788856, ...,         nan,
                         nan,         nan],
                [20.70374423, 21.00759339, 21.33958444, ...,         nan,
                         nan,         nan],
                [21.51354551, 21.89894161, 22.24181328, ...,         nan,
                         nan,         nan],
                ...,
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan]],
        
               [[19.79765603, 20.0425276 , 20.37364135, ...,         nan,
                         nan,         nan],
                [20.75342105, 21.0548372 , 21.38336409, ...,         nan,
                         nan,         nan],
                [21.56931114, 21.95257033, 22.29184951, ...,         nan,
                         nan,         nan],
                ...,
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan]],
        
               ...,
        
               [[32.35198559, 32.6067375 , 33.03042473, ...,         nan,
                         nan,         nan],
                [33.36125758, 33.67697289, 34.05366763, ...,         nan,
                         nan,         nan],
                [33.88776629, 34.22020703, 34.51102691, ...,         nan,
                         nan,         nan],
                ...,
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan]],
        
               [[32.39880072, 32.65257064, 33.07612073, ...,         nan,
                         nan,         nan],
                [33.40764671, 33.72288076, 34.10001663, ...,         nan,
                         nan,         nan],
                [33.93484997, 34.26712862, 34.55866382, ...,         nan,
                         nan,         nan],
                ...,
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan]],
        
               [[32.43001082, 32.68312607, 33.10658473, ...,         nan,
                         nan,         nan],
                [33.4385728 , 33.75348601, 34.13091595, ...,         nan,
                         nan,         nan],
                [33.9662391 , 34.29840968, 34.59042176, ...,         nan,
                         nan,         nan],
                ...,
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan],
                [        nan,         nan,         nan, ...,         nan,
                         nan,         nan]]])

    Scale near-inertial velocities with MLD

    In [25]:
    ds_slab["u_slab"] *= ds_slab.attrs["slab_model_H"] / mld_slab.mixed_layer_depth
    ds_slab["v_slab"] *= ds_slab.attrs["slab_model_H"] / mld_slab.mixed_layer_depth
    ds_slab["umag_slab"] *= ds_slab.attrs["slab_model_H"] / mld_slab.mixed_layer_depth
    

    Max near-inertial speed over good and whole forecast period

    We'll plot the time-maximum of near-inertial speed for the good forecast period and for the whole forecast period.

    First, we construct the plot.

    In [26]:
    marker_mapping = {
        "buoy": "circle",
        "drifting buoy": "diamond",
        "vessel": "triangle",
        "poi": "asterisk",
        "glider": "cross",
    }
    
    In [27]:
    markers = hv.dim("kind").categorize(marker_mapping, default="diamond")
    
    In [28]:
    slab_umag_good_forecast_max = ds_slab["umag_slab"].sel(
        time=slice(start_of_forecast, start_of_forecast + good_forecast_time)
    ).max("time")
    slab_umag_whole_forecast_max = ds_slab["umag_slab"].sel(
        time=slice(start_of_forecast, None)
    ).max("time")
    
    slab_umag_good_forecast_max = slab_umag_good_forecast_max.assign_coords({"lon": (((slab_umag_good_forecast_max.lon + 180) % 360) - 180)})
    slab_umag_whole_forecast_max = slab_umag_whole_forecast_max.assign_coords({"lon": (((slab_umag_whole_forecast_max.lon + 180) % 360) - 180)})
    all_positions['lon'] = (((all_positions.lon + 180) % 360) - 180)
    
    near_inertial_max_plots = (
        (
            slab_umag_good_forecast_max.hvplot(
                x="lon", y="lat", z="umag_slab",
                clim=(0, 1.0),
                cmap=cmocean.cm.speed,
                frame_width=800,
                hover=False,
                geo=True, coastline=True,
                crs=ccrs.PlateCarree(), projection=ccrs.PlateCarree(),
                title="Near-inertial speed max [m/s] and mixed-layer-depth [m], good forecast period"
            )
            + slab_umag_whole_forecast_max.hvplot(
                x="lon", y="lat", z="umag_slab",
                clim=(0, 1.0),
                cmap=cmocean.cm.speed,
                frame_width=800,
                hover=False,
                geo=True, coastline=True,
                crs=ccrs.PlateCarree(), projection=ccrs.PlateCarree(),
                title="Near-inertial speed max [m/s] and mixed-layer-depth [m], whole forecast period"
            )
        ) * mld_slab.mean("time").mixed_layer_depth.hvplot.contour(
            x="lon", y="lat", geo=True, cmap="gray", hover=True,
            levels=list(range(0, 50, 10)) + list(range(60, 120, 20)), line_width=1.5, alpha=0.5
        ) * all_positions.hvplot.points(
            y="lat", x="lon", geo=True, coastline=True,
            marker=markers,
            fill_color=None, line_color="black",
            line_width=2, size=70,
            hover=True, hover_cols=["kind", "name", "leg", "lat", "lon", "time"]
        ) * db_history.hvplot.line(
            x="Longitude", y="Latitude", geo=True,
            color="gray", line_widht=1.5, alpha=0.7,
            hover=True, hover_cols=["Time", "Latitude", "Longitude"]
        ) * gv.feature.grid()
    ).cols(1)
    
    /srv/conda/envs/notebook/lib/python3.7/site-packages/xarray/core/nanops.py:142: RuntimeWarning: Mean of empty slice
      return np.nanmean(a, axis=axis, dtype=dtype)
    WARNING:param.main: geo option cannot be used with kind='line' plot type. Geographic plots are only supported for following plot types: ['bivariate', 'contour', 'contourf', 'hexbin', 'image', 'labels', 'paths', 'points', 'points', 'polygons', 'quadmesh', 'rgb', 'vectorfield']
    WARNING:param.main:geo option cannot be used with kind='line' plot type. Geographic plots are only supported for following plot types: ['bivariate', 'contour', 'contourf', 'hexbin', 'image', 'labels', 'paths', 'points', 'points', 'polygons', 'quadmesh', 'rgb', 'vectorfield']
    WARNING:param.main: line_widht option not found for line plot; similar options include: ['line_width', 'min_width', 'line_dash']
    WARNING:param.main:line_widht option not found for line plot; similar options include: ['line_width', 'min_width', 'line_dash']
    

     

    In [29]:
    display(near_inertial_max_plots)
    
    WARNING:param.GeoOverlayPlot03829: title_format is deprecated. Please use title instead
    WARNING:param.GeoOverlayPlot03829:title_format is deprecated. Please use title instead
    WARNING:param.GeoOverlayPlot03829: title_format is deprecated. Please use title instead
    WARNING:param.GeoOverlayPlot03829:title_format is deprecated. Please use title instead
    WARNING:param.GeoOverlayPlot04274: title_format is deprecated. Please use title instead
    WARNING:param.GeoOverlayPlot04274:title_format is deprecated. Please use title instead
    WARNING:param.GeoOverlayPlot04274: title_format is deprecated. Please use title instead
    WARNING:param.GeoOverlayPlot04274:title_format is deprecated. Please use title instead
    

    Near-inertial current timeseries for buoy locations

    In [30]:
    time_series_plots = []
    time_formatter = DatetimeTickFormatter(
        months='%b %Y', days='%b %d'
    )
    
    forecast_spans = (
        hv.VSpan(
            start_of_forecast, start_of_forecast + good_forecast_time
        ).opts(padding=0, color='lightgray')
        * hv.VSpan(
            start_of_forecast + good_forecast_time, None
        ).opts(padding=0, color='pink')
    )
    
    mld_slab['lon'] = (((mld_slab.lon + 180) % 360) - 180)
    ds_slab['lon'] = (((ds_slab.lon + 180) % 360) - 180)
    ds_GFS['lon'] = (((ds_slab.lon + 180) % 360) - 180)
    
    for lat, lon, name in zip(all_positions["lat"], all_positions["lon"], all_positions["name"]):
        local_mld = mld_slab.mixed_layer_depth.sel(lat=lat, lon=lon, method='nearest').mean('time').data
        name = f"{name}: {lat}N {lon}E, MLD={local_mld:.0f}m"
        all_pos_ds = ds_slab.sel(lat=lat, lon=lon, method="nearest")
        all_pos_ds["U20"] = ds_GFS["U20"].sel(lat=lat, lon=lon, method="nearest")
        all_pos_ds["V20"] = ds_GFS["V20"].sel(lat=lat, lon=lon, method="nearest")
        
        if (all_pos_ds["umag_slab"].max("time").isnull().data.compute()):
            continue
        time_series_plots.append(
            (
                (
                    forecast_spans.redim.label(y="u_slab")
                    * all_pos_ds["u_slab"].hvplot.line(label="zonal near-inertial current")
                    * all_pos_ds["v_slab"].hvplot.line(label="meridional near-inertial current")
                    * all_pos_ds["umag_slab"].hvplot.line(label="near-inertial speed")
                ).options(
                    width=800, height=160, show_grid=True,
                    xaxis=None,
                    legend_cols=False, legend_position='right',
                    ylabel="current [m/s]", title=name
                )
                + (
                    forecast_spans.redim.label(y="U20")
                    * all_pos_ds["U20"].hvplot.line(label="zonal wind (20m)")
                    * all_pos_ds["V20"].hvplot.line(label="meridional wind (20m)")
                ).options(
                    width=800, height=160, show_grid=True,
                    xformatter=time_formatter,
                    legend_cols=False, legend_position='right',
                    ylabel="wind [m/s]", xlabel=""
                )
            )
        )
    
    time_series_plots = reduce(add, time_series_plots).cols(1)
    

     

    In [31]:
    display(time_series_plots)
    

    Atmospheric conditions over forecast period

    To get a feeling for the atmospheric conditions, we'll plot sea-level pressure anomalies every 12 hours for 3 days before and throughout the whole forecast period.

    Anomalies are calculated relative to the whole data period (usually 30+14 days).

    In [32]:
    SLP = ds_GFS["SLP"].compute()
    SLP_mean = SLP.mean("time")
    SLP_anomaly = (SLP - SLP_mean)
    
    /srv/conda/envs/notebook/lib/python3.7/site-packages/xarray/core/nanops.py:142: RuntimeWarning: Mean of empty slice
      return np.nanmean(a, axis=axis, dtype=dtype)
    
    In [33]:
    plot_every = np.timedelta64(12, "h")
    max_iter = ((SLP_anomaly.coords["time"].max("time") - start_of_forecast) / plot_every).item() // 1 + 1
    
    plot_times = [
        (start_of_forecast + n * plot_every)
        for n in range(-6, int(max_iter))
    ]
    
    plots = []
    
    for plot_time in plot_times:
        title = f"SLP anomaly [hPa], {pd.Timestamp(plot_time).strftime('%Y-%m-%d %H:%M:%S UTC')}"
        if plot_time > start_of_forecast:
            title += f"\t(forecast + {(plot_time - start_of_forecast) / np.timedelta64(1, 'h')}h)"
        try:
            plots.append(
                (
                    SLP_anomaly.sel(time=plot_time, method="nearest").compute().hvplot(
                        clim=(-10, 10),
                        cmap=cmocean.cm.delta,
                        frame_width=800,
                        geo=True, coastline=True,
                        crs=ccrs.PlateCarree(), projection=ccrs.PlateCarree(),
                        hover=False
                    )
                    * all_positions.hvplot.points(
                        y="lat", x="lon", geo=True, coastline=True,
                        marker=markers,
                        fill_color=None, line_color="black",
                        line_width=2, size=70,
                        hover=True, hover_cols=["kind", "name", "leg", "lat", "lon", "time"]
                    )
                    * gv.feature.grid()
                ).opts(
                    title=title,
                    show_grid=True
                )
            )    
        except Exception as e:
            print(f"for {plot_time} I got: {e}")
        
    slp_plot = reduce(add, plots).cols(1)
    

     

    In [34]:
    display(slp_plot)
    

    In [35]:
    !echo "Finished: $(date -Ins) (UTC)"
    
    Finished: 2021-09-05T22:49:42,014892402+00:00 (UTC)